From 8fee2ad1939e6683460fa428657d768023426cd8 Mon Sep 17 00:00:00 2001 From: lemon24 Date: Sun, 8 Dec 2024 11:53:10 +0200 Subject: [PATCH] Add reader.utils.archive_entries(). #290 --- CHANGES.rst | 7 ++- docs/api.rst | 7 +++ src/reader/_app/__init__.py | 9 +++ src/reader/_app/templates/entries.html | 3 + src/reader/core.py | 6 +- src/reader/utils.py | 78 +++++++++++++++++++++++++ tests/{test_utils.py => test__utils.py} | 0 tests/test_reader_utils.py | 59 +++++++++++++++++++ 8 files changed, 166 insertions(+), 3 deletions(-) create mode 100644 src/reader/utils.py rename tests/{test_utils.py => test__utils.py} (100%) create mode 100644 tests/test_reader_utils.py diff --git a/CHANGES.rst b/CHANGES.rst index 2d025fc6..d44d3a24 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -18,7 +18,12 @@ Unreleased instead of feed :attr:`~Feed.user_title` or :attr:`~Feed.title`. * Allow filtering entries by the entry source. -* Add :meth:`~Reader.copy_entry`. (:issue:`290`) +* Add :func:`reader.utils.archive_entries` for + copying entries to an "archived" feed. + (:issue:`290`) + + * Add :meth:`~Reader.copy_entry`. + * Allow archiving entries from the web app. * Fix bug causing :class:`Reader` operations from a thread other than the one that created the instance diff --git a/docs/api.rst b/docs/api.rst index 55235ac9..a9d187bc 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -190,3 +190,10 @@ Constants .. autodata:: reader.core.DEFAULT_RESERVED_NAME_SCHEME .. autodata:: reader.plugins.DEFAULT_PLUGINS + + + +Utilities +--------- + +.. 
@form_api(really=True)
@readererror_to_apierror()
def archive_all(data):
    """Web app action: archive the listed entries of one feed.

    Expects 'feed-url' (a feed URL) and 'entry-id' (a JSON-encoded list
    of entry ids) in the submitted form data.
    """
    url = data['feed-url']
    ids = json.loads(data['entry-id'])
    # pair every id with its feed to form full (feed_url, entry_id) resource ids
    resource_ids = [(url, entry_id) for entry_id in ids]
    archive_entries(get_reader(), resource_ids)
def archive_entries(
    reader: Reader,
    entries: Collection[EntryInput],
    /,
    feed_url: str = 'reader:archived',
    feed_user_title: str | None = 'Archived',
) -> None:
    """Copy a list of entries to a special "archived" feed.

    Entries that are already in the archived feed will be overwritten.

    The original entries will remain unchanged.

    Args:
        reader (Reader): A reader instance.
        entries (list(tuple(str, str) or Entry)): Entries to be archived.
        feed_url (str):
            The URL of the archived feed.
            If the feed does not exist, it will be created.
        feed_user_title (str or None):
            :attr:`~.Feed.user_title` for the archived feed.

    Raises:
        EntryNotFoundError: If any of the entries does not exist.
        StorageError

    .. versionadded:: 3.16

    """
    # resolve all inputs up front, so bad arguments fail before storage changes
    entry_ids = [_entry_argument(e) for e in entries]

    try:
        # the archived feed is "virtual" (not a real URL), so never update it
        reader.add_feed(feed_url, allow_invalid_url=True)
        reader.disable_feed_updates(feed_url)
    except FeedExistsError:
        # already created by a previous call
        pass
    # (re)set the title unconditionally, so it also applies to existing feeds
    reader.set_feed_user_title(feed_url, feed_user_title)

    for src in entry_ids:
        dst = feed_url, _make_archived_entry_id(feed_url, src)
        try:
            reader.copy_entry(src, dst)
        except EntryExistsError:
            # archived before; replace the stale copy with a fresh one
            reader.delete_entry(dst)
            reader.copy_entry(src, dst)

    # TODO: ideally, archiving may redirect to a view of the archived entries
    #
    # this can be achieved in one of the following ways:
    #
    # * filter by the archived entry ids
    #   * get_entries(entries=...) does not exist
    #   * if there are a lot of entries, the query string may be too big
    # * filter by entry source – get_entries(source=...)
    #   * this will not include entries that already have a source
    #   * idem for original_feed_url
    # * filter by entry id prefix – reader:archived?feed=...&
    #   * get_entries(entry_id_prefix=...) does not exist
    #   * by far the most correct
    #
    # until we figure this out, leaving return type to None
does not exist + # * by far the most correct + # + # until we figure this out, leaving return type to None + + +def _make_archived_entry_id(feed_url: str, entry: tuple[str, str]) -> str: + query = urlencode({'feed': entry[0], 'entry': entry[1]}) + return f"{feed_url}?{query}" diff --git a/tests/test_utils.py b/tests/test__utils.py similarity index 100% rename from tests/test_utils.py rename to tests/test__utils.py diff --git a/tests/test_reader_utils.py b/tests/test_reader_utils.py new file mode 100644 index 00000000..e655f069 --- /dev/null +++ b/tests/test_reader_utils.py @@ -0,0 +1,59 @@ +from unittest.mock import Mock + +import pytest + +from fakeparser import Parser +from reader import EntryNotFoundError +from reader.utils import archive_entries + + +def test_archive_entries(reader): + reader._parser = parser = Parser() + reader.copy_entry = Mock(wraps=reader.copy_entry) + + feed = parser.feed(1) + one = parser.entry(1, 'one', title='one') + two = parser.entry(1, '&?:/', title='not URL safe') + reader.add_feed(feed) + reader.update_feeds() + + # archive an entry, archived does not exist + + reader.copy_entry.reset_mock() + archive_entries(reader, [one]) + + assert len(reader.copy_entry.call_args_list) == 1 + assert {e.resource_id + (e.title,) for e in reader.get_entries()} == { + ('1', 'one', 'one'), + ('1', '&?:/', 'not URL safe'), + ('reader:archived', 'reader:archived?feed=1&entry=one', 'one'), + } + archived = reader.get_feed('reader:archived') + assert archived.updates_enabled is False + assert archived.user_title == 'Archived' + + # archive two entries (one already archived), archived exists + + one = parser.entry(1, 'one', title='new one') + reader.update_feeds() + + reader.copy_entry.reset_mock() + archive_entries(reader, [one, two]) + + # 3 because one is copied (exists error), deleted, and then copied again + assert len(reader.copy_entry.call_args_list) == 3 + assert {e.resource_id + (e.title,) for e in reader.get_entries()} == { + ('1', 'one', 'new 
def test_archive_entries(reader):
    """End-to-end test for reader.utils.archive_entries().

    Covers: first archive (archived feed gets created), re-archiving
    (existing archived entry is overwritten), non-URL-safe entry ids,
    and archiving a nonexistent entry.
    """
    reader._parser = parser = Parser()
    # wrap (not replace) copy_entry so real behavior runs but calls are counted
    reader.copy_entry = Mock(wraps=reader.copy_entry)

    feed = parser.feed(1)
    one = parser.entry(1, 'one', title='one')
    # id chosen so the archived id must be percent-encoded
    two = parser.entry(1, '&?:/', title='not URL safe')
    reader.add_feed(feed)
    reader.update_feeds()

    # archive an entry, archived does not exist

    reader.copy_entry.reset_mock()
    archive_entries(reader, [one])

    assert len(reader.copy_entry.call_args_list) == 1
    assert {e.resource_id + (e.title,) for e in reader.get_entries()} == {
        ('1', 'one', 'one'),
        ('1', '&?:/', 'not URL safe'),
        ('reader:archived', 'reader:archived?feed=1&entry=one', 'one'),
    }
    archived = reader.get_feed('reader:archived')
    assert archived.updates_enabled is False
    assert archived.user_title == 'Archived'

    # archive two entries (one already archived), archived exists

    one = parser.entry(1, 'one', title='new one')
    reader.update_feeds()

    reader.copy_entry.reset_mock()
    archive_entries(reader, [one, two])

    # 3 because one is copied (exists error), deleted, and then copied again
    assert len(reader.copy_entry.call_args_list) == 3
    assert {e.resource_id + (e.title,) for e in reader.get_entries()} == {
        ('1', 'one', 'new one'),
        ('1', '&?:/', 'not URL safe'),
        ('reader:archived', 'reader:archived?feed=1&entry=one', 'new one'),
        (
            'reader:archived',
            'reader:archived?feed=1&entry=%26%3F%3A%2F',
            'not URL safe',
        ),
    }

    # archive inexistent entry

    with pytest.raises(EntryNotFoundError):
        archive_entries(reader, [('1', 'inexistent')])